Let \(X_t\) be the true concentration of chlorophyll-a on day \(t\) and \(Y_t\) be the measured value on day \(t\).
We begin by loading the required packages and the cleaned dataset used throughout the analysis:
# Load the JAGS interface (also attaches coda, used for MCMC diagnostics below)
library(rjags)
## Loading required package: coda
## Linked to JAGS 4.3.2
## Loaded modules: basemod,bugs
# Helper package for state-space model fitting and plotting (fit_dlm, ciEnvelope, col.alpha)
library(ecoforecastR)
# Pre-processed chlorophyll-a time series; provides `cleaned_data` used below
load("cleaned_data.RData")
Data model: \[ Y_t \sim N(X_t, \tau_\text{obs}) \]
Process model: \[ X_{t+1} \sim N(X_t, \tau_\text{add}) \]
Priors: \[ X_1 \sim N(X_{ic}, \tau_{ic}) \] \[ \tau_\text{obs} \sim \text{Gamma}(a_\text{obs}, r_\text{obs}) \] \[ \tau_\text{add} \sim \text{Gamma}(a_\text{add}, r_\text{add}) \]
# Fit the random-walk state-space model in JAGS.
# NOTE(review): this script presumably creates `rwalk.jags.out` and `rwalk.out`,
# both used below -- confirm against fit_random_walk.R.
source("fit_random_walk.R")
## Compiling model graph
## Resolving undeclared variables
## Allocating nodes
## Graph information:
## Observed stochastic nodes: 2009
## Unobserved stochastic nodes: 3479
## Total graph size: 5495
##
## Initializing model
# Discard burn-in
# Keep only the first two monitored variables (the precisions tau_add, tau_obs)
# and drop iterations before 1000 as burn-in.
rwalk.params <- window(rwalk.jags.out[,1:2], start = 1000)
# Plot and summarize
plot(rwalk.params)
summary(rwalk.params)
##
## Iterations = 1000:3000
## Thinning interval = 1
## Number of chains = 3
## Sample size per chain = 2001
##
## 1. Empirical mean and standard deviation for each variable,
## plus standard error of the mean:
##
## Mean SD Naive SE Time-series SE
## tau_add 4.486 0.2189 0.002825 0.009822
## tau_obs 23.854 2.7901 0.036011 0.192579
##
## 2. Quantiles for each variable:
##
## 2.5% 25% 50% 75% 97.5%
## tau_add 4.083 4.336 4.475 4.626 4.944
## tau_obs 18.733 21.904 23.757 25.597 29.817
# Posterior correlation between the two precision parameters
cor(as.matrix(rwalk.params))
## tau_add tau_obs
## tau_add 1.0000000 -0.5567482
## tau_obs -0.5567482 1.0000000
# Pairwise scatterplots of the posterior samples
pairs(as.matrix(rwalk.params))
## Credible-interval plot for the random-walk latent states
time_window <- c(1, nrow(cleaned_data))  # adjust to zoom in on a sub-interval
# Columns of the MCMC output matrix that hold the latent states x[t]
state_cols <- grep("^x", colnames(rwalk.out))
# 2.5% / 50% / 97.5% posterior quantiles for each day
rwalk_ci <- apply(rwalk.out[, state_cols], 2, quantile, c(0.025, 0.5, 0.975))
# Empty plot frame spanning the observed chlorophyll-a range
plot(cleaned_data$datetime, rwalk_ci[2, ],
     type = 'n',
     xlim = cleaned_data$datetime[time_window],
     ylim = range(cleaned_data$chla, na.rm = TRUE),
     xlab = "Time",
     ylab = "Chlorophyll-a",
     main = "Random Walk Model")
# Use monthly tick marks when zoomed in to a short window
if (diff(time_window) < 100) {
  axis.Date(1,
            at = seq(cleaned_data$datetime[time_window[1]],
                     cleaned_data$datetime[time_window[2]], by = 'month'),
            format = "%Y-%m")
}
# Shaded 95% credible envelope, then the raw observations on top
ecoforecastR::ciEnvelope(cleaned_data$datetime, rwalk_ci[1, ], rwalk_ci[3, ],
                         col = ecoforecastR::col.alpha("lightBlue", 0.75))
points(cleaned_data$datetime, cleaned_data$chla, pch = "+", cex = 0.5)
Data model: \[ Y_t \sim N(X_t, \tau_\text{obs}) \]
Process model: \[ X_{t} \sim N(X_{t-365}, \tau_\text{add}) \]
Priors: \[ X_1 \sim N(X_{ic}, \tau_{ic}) \] \[ \tau_\text{obs} \sim \text{Gamma}(a_\text{obs}, r_\text{obs}) \] \[ \tau_\text{add} \sim \text{Gamma}(a_\text{add}, r_\text{add}) \]
# Fit the previous-year (lag-365) state-space model in JAGS.
# NOTE(review): presumably creates `pyear.jags.out` and `pyear.out`, both used
# below -- confirm against fit_previous_year_model.R.
source("fit_previous_year_model.R")
## Compiling model graph
## Resolving undeclared variables
## Allocating nodes
## Graph information:
## Observed stochastic nodes: 2009
## Unobserved stochastic nodes: 3479
## Total graph size: 5495
##
## Initializing model
# Discard burn-in
# Keep only the two precision parameters (tau_add, tau_obs) and drop
# iterations before 1000 as burn-in.
pyear.params <- window(pyear.jags.out[,1:2], start = 1000)
# Plot and summarize
plot(pyear.params)
summary(pyear.params)
##
## Iterations = 1000:3000
## Thinning interval = 1
## Number of chains = 3
## Sample size per chain = 2001
##
## 1. Empirical mean and standard deviation for each variable,
## plus standard error of the mean:
##
## Mean SD Naive SE Time-series SE
## tau_add 11.6143 2.27369 0.029346 0.3217167
## tau_obs 0.5364 0.01991 0.000257 0.0007283
##
## 2. Quantiles for each variable:
##
## 2.5% 25% 50% 75% 97.5%
## tau_add 7.9327 9.9850 11.4240 12.8751 16.8215
## tau_obs 0.4985 0.5229 0.5361 0.5496 0.5764
# Posterior correlation between the two precision parameters
cor(as.matrix(pyear.params))
## tau_add tau_obs
## tau_add 1.0000000 -0.1514624
## tau_obs -0.1514624 1.0000000
# Pairwise scatterplots of the posterior samples
pairs(as.matrix(pyear.params))
## Credible-interval plot for the previous-year model's latent states
time_window <- c(1, nrow(cleaned_data))  # adjust to zoom in on a sub-interval
# Posterior predictions from the previous-year model: columns holding x[t]
state_cols <- grep("^x", colnames(pyear.out))
# 2.5% / 50% / 97.5% posterior quantiles for each day
pyear_ci <- apply(pyear.out[, state_cols], 2, quantile, c(0.025, 0.5, 0.975))
# Empty plot frame spanning the observed chlorophyll-a range
plot(cleaned_data$datetime, pyear_ci[2, ],
     type = 'n',
     xlim = cleaned_data$datetime[time_window],
     ylim = range(cleaned_data$chla, na.rm = TRUE),
     xlab = "Time",
     ylab = "Chlorophyll-a",
     main = "Previous Year's Chlorophyll-A Model")
# Use monthly tick marks when zoomed in to a short window
if (diff(time_window) < 100) {
  axis.Date(1,
            at = seq(cleaned_data$datetime[time_window[1]],
                     cleaned_data$datetime[time_window[2]], by = 'month'),
            format = "%Y-%m")
}
# Shaded 95% credible envelope, then the raw observations on top
ecoforecastR::ciEnvelope(cleaned_data$datetime, pyear_ci[1, ], pyear_ci[3, ],
                         col = ecoforecastR::col.alpha("lightBlue", 0.75))
points(cleaned_data$datetime, cleaned_data$chla, pch = "+", cex = 0.5)
Data model: \[ Y_t \sim N(X_t, \tau_\text{obs}) \]
Process model: \[ X_{t+1} \sim N(X_t + \beta_0 + \beta_{DO} Z_{DO, t} + \beta_{pH}Z_{pH, t} + \beta_{\text{turb}}Z_{\text{turb}, t} + \beta_X X_t, \tau_\text{add}) \]
Priors: \[ X_1 \sim N(X_{ic}, \tau_{ic}) \] \[ \tau_\text{obs} \sim \text{Gamma}(a_\text{obs}, r_\text{obs}) \] \[ \tau_\text{add} \sim \text{Gamma}(a_\text{add}, r_\text{add}) \]
# Fit a dynamic linear model with *internal* water-quality covariates
# (dissolved oxygen, daily pH, daily turbidity) plus a density-dependence
# term X; "1" adds an intercept. fit_dlm prints the generated JAGS code.
internal_model <- ecoforecastR::fit_dlm(model = list(obs = "chla", fixed = "1 + X + oxygen + daily_pH + daily_turbidity"), cleaned_data)
## [1] " \n model{\n \n #### Priors\n x[1] ~ dnorm(x_ic,tau_ic)\n tau_obs ~ dgamma(a_obs,r_obs)\n tau_add ~ dgamma(a_add,r_add)\n\n #### Random Effects\n #RANDOM tau_alpha~dgamma(0.1,0.1)\n #RANDOM for(i in 1:nrep){ \n #RANDOM alpha[i]~dnorm(0,tau_alpha)\n #RANDOM }\n\n #### Fixed Effects\n betaX ~dnorm(0,0.001)\n betaIntercept~dnorm(0,0.001)\n betaoxygen~dnorm(0,0.001)\n betadaily_pH~dnorm(0,0.001)\n betadaily_turbidity~dnorm(0,0.001)\n for(j in 1: 4 ){\n muXf[j] ~ dnorm(0,0.001)\n tauXf[j] ~ dgamma(0.01,0.01)\n }\n\n \n #### Data Model\n for(t in 1:n){\n OBS[t] ~ dnorm(x[t],tau_obs)\n Xf[t,1] ~ dnorm(muXf[1],tauXf[1])\nXf[t,2] ~ dnorm(muXf[2],tauXf[2])\nXf[t,3] ~ dnorm(muXf[3],tauXf[3])\nXf[t,4] ~ dnorm(muXf[4],tauXf[4])\n }\n \n #### Process Model\n for(t in 2:n){\n mu[t] <- x[t-1] + betaX*x[t-1] + betaIntercept*Xf[t,1] + betaoxygen*Xf[t,2] + betadaily_pH*Xf[t,3] + betadaily_turbidity*Xf[t,4]\n x[t]~dnorm(mu[t],tau_add)\n }\n\n }"
## Compiling model graph
## Resolving undeclared variables
## Allocating nodes
## Graph information:
## Observed stochastic nodes: 10894
## Unobserved stochastic nodes: 5579
## Total graph size: 30120
##
## Initializing model
#names(internal.model)
## parameter diagnostics
# Drop iterations before 1000 as burn-in and inspect the regression
# coefficients and precisions.
params_internal <- window(internal_model$params,start=1000) ## remove burn-in
plot(params_internal)
# Posterior correlations among the coefficients and precisions
cor(as.matrix(params_internal))
## betaIntercept betaX betadaily_pH betadaily_turbidity
## betaIntercept 1.00000000 -0.35213831 -0.84742258 0.251640769
## betaX -0.35213831 1.00000000 0.02876006 -0.130903265
## betadaily_pH -0.84742258 0.02876006 1.00000000 -0.165008656
## betadaily_turbidity 0.25164077 -0.13090326 -0.16500866 1.000000000
## betaoxygen -0.49855553 0.48112986 -0.02476056 -0.207375133
## tau_add -0.04806632 0.17493661 -0.01301224 0.009374721
## tau_obs -0.05833468 -0.17823221 0.12928302 -0.032728856
## betaoxygen tau_add tau_obs
## betaIntercept -0.49855553 -0.048066317 -0.05833468
## betaX 0.48112986 0.174936612 -0.17823221
## betadaily_pH -0.02476056 -0.013012238 0.12928302
## betadaily_turbidity -0.20737513 0.009374721 -0.03272886
## betaoxygen 1.00000000 0.086840553 -0.07082428
## tau_add 0.08684055 1.000000000 -0.56427058
## tau_obs -0.07082428 -0.564270581 1.00000000
# Pairwise scatterplots of the posterior samples
pairs(as.matrix(params_internal))
summary(params_internal)
##
## Iterations = 1000:5000
## Thinning interval = 1
## Number of chains = 3
## Sample size per chain = 4001
##
## 1. Empirical mean and standard deviation for each variable,
## plus standard error of the mean:
##
## Mean SD Naive SE Time-series SE
## betaIntercept 0.506484 0.158836 1.450e-03 2.813e-02
## betaX -0.068784 0.007217 6.587e-05 4.978e-04
## betadaily_pH 0.016684 0.024781 2.262e-04 3.731e-03
## betadaily_turbidity 0.002797 0.002533 2.312e-05 6.186e-05
## betaoxygen -0.059973 0.010032 9.157e-05 1.053e-03
## tau_add 3.985539 0.173103 1.580e-03 1.003e-02
## tau_obs 61.685138 17.109376 1.562e-01 2.096e+00
##
## 2. Quantiles for each variable:
##
## 2.5% 25% 50% 75% 97.5%
## betaIntercept 0.200531 0.3903111 0.513569 0.615929 0.827541
## betaX -0.082741 -0.0736821 -0.068908 -0.064015 -0.054399
## betadaily_pH -0.032163 0.0004725 0.015668 0.032710 0.065956
## betadaily_turbidity -0.002181 0.0010916 0.002795 0.004504 0.007746
## betaoxygen -0.080091 -0.0671555 -0.059633 -0.052517 -0.042157
## tau_add 3.668593 3.8653437 3.975876 4.096812 4.347602
## tau_obs 37.008342 49.6362131 58.976364 69.361340 105.541650
## Credible-interval plot for the internal-factors model
time_window <- c(1, nrow(cleaned_data))  # adjust to zoom in on a sub-interval
# Posterior predictive draws for the latent states
pred_mat <- as.matrix(internal_model$predict)
# 2.5% / 50% / 97.5% posterior quantiles for each day
internal_ci <- apply(pred_mat, 2, quantile, c(0.025, 0.5, 0.975))
# Empty plot frame spanning the observed chlorophyll-a range
plot(cleaned_data$datetime, internal_ci[2, ],
     type = 'n',
     #log='y',
     xlim = cleaned_data$datetime[time_window],
     ylim = range(cleaned_data$chla, na.rm = TRUE),
     xlab = "Time",
     ylab = "Chlorophyll-a",
     main = "Internal Factors Model")
## adjust x-axis label to be monthly if zoomed
if (diff(time_window) < 100) {
  axis.Date(1,
            at = seq(cleaned_data$datetime[time_window[1]],
                     cleaned_data$datetime[time_window[2]], by = 'month'),
            format = "%Y-%m")
}
# Shaded 95% credible envelope, then the raw observations on top
ecoforecastR::ciEnvelope(cleaned_data$datetime, internal_ci[1, ], internal_ci[3, ],
                         col = ecoforecastR::col.alpha("lightBlue", 0.75))
points(cleaned_data$datetime, cleaned_data$chla, pch = "+", cex = 0.5)
# NOTE(review): this correlation matrix duplicates the one already computed in
# the internal-model diagnostics above; the echoed output is retained verbatim,
# but the duplicate call could be removed.
cor(as.matrix(params_internal))
## betaIntercept betaX betadaily_pH betadaily_turbidity
## betaIntercept 1.00000000 -0.35213831 -0.84742258 0.251640769
## betaX -0.35213831 1.00000000 0.02876006 -0.130903265
## betadaily_pH -0.84742258 0.02876006 1.00000000 -0.165008656
## betadaily_turbidity 0.25164077 -0.13090326 -0.16500866 1.000000000
## betaoxygen -0.49855553 0.48112986 -0.02476056 -0.207375133
## tau_add -0.04806632 0.17493661 -0.01301224 0.009374721
## tau_obs -0.05833468 -0.17823221 0.12928302 -0.032728856
## betaoxygen tau_add tau_obs
## betaIntercept -0.49855553 -0.048066317 -0.05833468
## betaX 0.48112986 0.174936612 -0.17823221
## betadaily_pH -0.02476056 -0.013012238 0.12928302
## betadaily_turbidity -0.20737513 0.009374721 -0.03272886
## betaoxygen 1.00000000 0.086840553 -0.07082428
## tau_add 0.08684055 1.000000000 -0.56427058
## tau_obs -0.07082428 -0.564270581 1.00000000
Data model: \[ Y_t \sim N(X_t, \tau_\text{obs}) \]
Process model: \[ X_{t+1} \sim N(X_t + \beta_0 + \beta_{\text{temp}} Z_{\text{temp}, t} + \beta_{lr}Z_{lr, t} + \beta_{sr}Z_{sr, t} + \beta_{\text{prec}} Z_{\text{prec}, t}, \tau_\text{add}) \]
Priors: \[ X_1 \sim N(X_{ic}, \tau_{ic}) \] \[ \tau_\text{obs} \sim \text{Gamma}(a_\text{obs}, r_\text{obs}) \] \[ \tau_\text{add} \sim \text{Gamma}(a_\text{add}, r_\text{add}) \]
# Set up data object (with NA handling built-in)
# NOTE(review): missing chla values are passed through as NA; JAGS treats NA
# response values as unobserved nodes to be imputed -- confirm this is the
# intended "NA handling".
data_ext <- list(
y = cleaned_data$chla,
n = length(cleaned_data$chla),
temp = cleaned_data$air_temperature,
longrad = cleaned_data$surface_downwelling_longwave_flux_in_air,
shortrad = cleaned_data$surface_downwelling_shortwave_flux_in_air,
precip = cleaned_data$precipitation_flux,
x_ic = 1, tau_ic = 100,
a_obs = 1, r_obs = 1,
a_add = 1, r_add = 1
)
# Fit the model — this version has no X term
# (no explicit betaX regression term; the printed JAGS code below shows the
# process model still carries x[t-1] forward as a random-walk baseline)
model_ext <- "~ 1 + temp + longrad + shortrad + precip"
ef.out.external <- ecoforecastR::fit_dlm(
model = list(obs = "y", fixed = model_ext),
data = data_ext
)
## [1] " \n model{\n \n #### Priors\n x[1] ~ dnorm(x_ic,tau_ic)\n tau_obs ~ dgamma(a_obs,r_obs)\n tau_add ~ dgamma(a_add,r_add)\n\n #### Random Effects\n #RANDOM tau_alpha~dgamma(0.1,0.1)\n #RANDOM for(i in 1:nrep){ \n #RANDOM alpha[i]~dnorm(0,tau_alpha)\n #RANDOM }\n\n #### Fixed Effects\n betaIntercept~dnorm(0,0.001)\n betatemp~dnorm(0,0.001)\n betalongrad~dnorm(0,0.001)\n betashortrad~dnorm(0,0.001)\n betaprecip~dnorm(0,0.001)\n for(j in 1: 5 ){\n muXf[j] ~ dnorm(0,0.001)\n tauXf[j] ~ dgamma(0.01,0.01)\n }\n\n \n #### Data Model\n for(t in 1:n){\n OBS[t] ~ dnorm(x[t],tau_obs)\n Xf[t,1] ~ dnorm(muXf[1],tauXf[1])\nXf[t,2] ~ dnorm(muXf[2],tauXf[2])\nXf[t,3] ~ dnorm(muXf[3],tauXf[3])\nXf[t,4] ~ dnorm(muXf[4],tauXf[4])\nXf[t,5] ~ dnorm(muXf[5],tauXf[5])\n }\n \n #### Process Model\n for(t in 2:n){\n mu[t] <- x[t-1] + betaIntercept*Xf[t,1] + betatemp*Xf[t,2] + betalongrad*Xf[t,3] + betashortrad*Xf[t,4] + betaprecip*Xf[t,5]\n x[t]~dnorm(mu[t],tau_add)\n }\n\n }"
## Compiling model graph
## Resolving undeclared variables
## Allocating nodes
## Graph information:
## Observed stochastic nodes: 11444
## Unobserved stochastic nodes: 7774
## Total graph size: 32544
##
## Initializing model
# Optional: save results
# save(ef.out.external, file = "external_factors.RData")
# Discard burn-in
# Drop iterations before 1000 and inspect coefficients and precisions.
params_external <- window(ef.out.external$params, start = 1000)
# Plot and summarize
plot(params_external)
summary(params_external)
##
## Iterations = 1000:5000
## Thinning interval = 1
## Number of chains = 3
## Sample size per chain = 4001
##
## 1. Empirical mean and standard deviation for each variable,
## plus standard error of the mean:
##
## Mean SD Naive SE Time-series SE
## betaIntercept 1.551e-01 4.483e-01 4.092e-03 8.312e-02
## betalongrad 6.563e-05 4.564e-04 4.166e-06 1.028e-04
## betaprecip 5.296e-01 3.962e-01 3.616e-03 3.141e-02
## betashortrad 5.866e-05 2.295e-04 2.095e-06 1.486e-05
## betatemp -7.026e-04 1.703e-03 1.555e-05 3.197e-04
## tau_add 4.022e+00 1.944e-01 1.775e-03 1.095e-02
## tau_obs 4.772e+01 1.292e+01 1.179e-01 1.378e+00
##
## 2. Quantiles for each variable:
##
## 2.5% 25% 50% 75% 97.5%
## betaIntercept -0.6604318 -1.520e-01 1.906e-01 4.905e-01 8.757e-01
## betalongrad -0.0007384 -2.743e-04 6.274e-05 3.843e-04 9.951e-04
## betaprecip -0.2362009 2.613e-01 5.269e-01 7.919e-01 1.321e+00
## betashortrad -0.0003785 -9.814e-05 5.834e-05 2.074e-04 5.250e-04
## betatemp -0.0036246 -1.891e-03 -8.767e-04 4.968e-04 2.390e-03
## tau_add 3.6547707 3.889e+00 4.016e+00 4.150e+00 4.424e+00
## tau_obs 30.4049847 3.894e+01 4.511e+01 5.315e+01 8.258e+01
# Posterior correlations among the coefficients and precisions
cor(as.matrix(params_external))
## betaIntercept betalongrad betaprecip betashortrad betatemp
## betaIntercept 1.00000000 0.10232422 0.084848432 0.267670131 -0.95554626
## betalongrad 0.10232422 1.00000000 -0.618305025 -0.341547586 -0.37659664
## betaprecip 0.08484843 -0.61830503 1.000000000 0.335500029 0.07439579
## betashortrad 0.26767013 -0.34154759 0.335500029 1.000000000 -0.22980139
## betatemp -0.95554626 -0.37659664 0.074395786 -0.229801392 1.00000000
## tau_add 0.04849808 0.03022940 -0.001795059 0.002471215 -0.05365242
## tau_obs -0.07915157 -0.06894172 0.068226147 0.008623784 0.09112738
## tau_add tau_obs
## betaIntercept 0.048498085 -0.079151571
## betalongrad 0.030229396 -0.068941723
## betaprecip -0.001795059 0.068226147
## betashortrad 0.002471215 0.008623784
## betatemp -0.053652422 0.091127383
## tau_add 1.000000000 -0.621353110
## tau_obs -0.621353110 1.000000000
# Pairwise scatterplots of the posterior samples
pairs(as.matrix(params_external))
## Credible-interval plot for the external-factors model
time_window <- c(1, nrow(cleaned_data))  # adjust to zoom in on a sub-interval
# Posterior predictive draws for the latent states
ext_pred <- as.matrix(ef.out.external$predict)
# 2.5% / 50% / 97.5% posterior quantiles for each day
ext_ci <- apply(ext_pred, 2, quantile, c(0.025, 0.5, 0.975))
# Empty plot frame spanning the observed chlorophyll-a range
plot(cleaned_data$datetime, ext_ci[2, ],
     type = 'n',
     #log='y',
     xlim = cleaned_data$datetime[time_window],
     ylim = range(cleaned_data$chla, na.rm = TRUE),
     xlab = "Time",
     ylab = "Chlorophyll-a",
     main = "External Factors Model")
# Use monthly tick marks when zoomed in to a short window
if (diff(time_window) < 100) {
  axis.Date(1,
            at = seq(cleaned_data$datetime[time_window[1]],
                     cleaned_data$datetime[time_window[2]], by = 'month'),
            format = "%Y-%m")
}
# Shaded 95% credible envelope, then the raw observations on top
ecoforecastR::ciEnvelope(cleaned_data$datetime, ext_ci[1, ], ext_ci[3, ],
                         col = ecoforecastR::col.alpha("lightBlue", 0.75))
points(cleaned_data$datetime, cleaned_data$chla, pch = "+", cex = 0.5)
Data model: \[ Y_t \sim N(X_t, \tau_\text{obs}) \]
Process model: \[ X_{t+1} \sim N(X_t + \beta_0 + \beta_{\text{temp}} Z_{\text{temp}, t} + \beta_{\text{prec}} Z_{\text{prec}, t} + \beta_X X_t, \tau_\text{add}) \]
Priors: \[ X_1 \sim N(X_{ic}, \tau_{ic}) \] \[ \tau_\text{obs} \sim \text{Gamma}(a_\text{obs}, r_\text{obs}) \] \[ \tau_\text{add} \sim \text{Gamma}(a_\text{add}, r_\text{add}) \]
This model can be re-fit by sourcing the script "03_fit_combined_model.R".
# Saved combined-factors model fit; provides `ef.out.combined` used below
load("combined_factors.RData")
# Gelman-Rubin convergence diagnostic; point estimates near 1 indicate
# the chains have converged.
gelman.diag(ef.out.combined$params)
## Potential scale reduction factors:
##
## Point est. Upper C.I.
## betaIntercept 1.00 1.01
## betaX 1.03 1.07
## betaprecip 1.00 1.00
## betatemp 1.01 1.02
## tau_add 1.01 1.03
## tau_obs 1.01 1.03
##
## Multivariate psrf
##
## 1.02
# Shrink factor vs. iteration; used to choose the burn-in cutoff below
BGR <- gelman.plot(ef.out.combined$params)
Based on the Gelman–Rubin diagnostic above, we set the burn-in to 5000 iterations.
# Load in params with removed burnin
# NOTE(review): this .RData presumably provides `params` (and the `predict`
# object used for the CI plot below) -- confirm its contents.
load("combined_factors_noburnin.RData")
## parameter diagnostics
plot(params)
summary(params)
##
## Iterations = 5000:40000
## Thinning interval = 1
## Number of chains = 3
## Sample size per chain = 35001
##
## 1. Empirical mean and standard deviation for each variable,
## plus standard error of the mean:
##
## Mean SD Naive SE Time-series SE
## betaIntercept -0.090603 0.059994 1.851e-04 1.707e-03
## betaX -0.062484 0.007747 2.391e-05 4.277e-04
## betaprecip 0.687156 0.324465 1.001e-03 3.747e-03
## betatemp 0.008641 0.002727 8.415e-06 8.387e-05
## tau_add 3.971904 0.173234 5.346e-04 3.669e-03
## tau_obs 62.529297 16.134241 4.979e-02 5.594e-01
##
## 2. Quantiles for each variable:
##
## 2.5% 25% 50% 75% 97.5%
## betaIntercept -0.207433 -0.131141 -0.091129 -0.05009 0.02757
## betaX -0.078010 -0.067597 -0.062428 -0.05728 -0.04733
## betaprecip 0.047165 0.468013 0.688932 0.90562 1.32021
## betatemp 0.003293 0.006779 0.008671 0.01049 0.01394
## tau_add 3.655644 3.852210 3.963674 4.08227 4.33514
## tau_obs 37.510097 51.086828 60.460585 71.58858 99.54893
# Posterior correlations among the coefficients and precisions
cor(as.matrix(params))
## betaIntercept betaX betaprecip betatemp tau_add
## betaIntercept 1.00000000 0.1380906 0.205634106 -0.94777660 0.01452692
## betaX 0.13809061 1.0000000 -0.105042682 -0.37326215 0.20644109
## betaprecip 0.20563411 -0.1050427 1.000000000 -0.29629568 0.03326674
## betatemp -0.94777660 -0.3732622 -0.296295677 1.00000000 -0.07262364
## tau_add 0.01452692 0.2064411 0.033266736 -0.07262364 1.00000000
## tau_obs -0.04872104 -0.2390540 -0.007783404 0.10820673 -0.54775814
## tau_obs
## betaIntercept -0.048721043
## betaX -0.239053966
## betaprecip -0.007783404
## betatemp 0.108206726
## tau_add -0.547758141
## tau_obs 1.000000000
# Pairwise scatterplots of the posterior samples
pairs(as.matrix(params))
## confidence interval
time.rng = c(1,nrow(cleaned_data)) ## you can adjust this line to zoom in and out on specific time intervals
# NOTE(review): `predict` here must be an object loaded from
# "combined_factors_noburnin.RData"; otherwise as.matrix() would be applied to
# the base R `predict` generic. Consider a less ambiguous object name.
out <- as.matrix(predict)
# 2.5% / 50% / 97.5% posterior quantiles for each day
ci <- apply(out,2,quantile,c(0.025,0.5,0.975))
# Empty plot frame spanning the observed chlorophyll-a range
plot(cleaned_data$datetime,ci[2,],type='n',ylim=range(cleaned_data$chla,na.rm=TRUE),
#log='y',
xlim=cleaned_data$datetime[time.rng],
xlab = "Time",
ylab = "Chlorophyll-a",
main = "Combined Factors Model")
## adjust x-axis label to be monthly if zoomed
if(diff(time.rng) < 100){
axis.Date(1, at=seq(cleaned_data$datetime[time.rng[1]],cleaned_data$datetime[time.rng[2]],by='month'), format = "%Y-%m")
}
# Shaded 95% credible envelope, then the raw observations on top
ecoforecastR::ciEnvelope(cleaned_data$datetime,ci[1,],ci[3,],col=ecoforecastR::col.alpha("lightBlue",0.75))
points(cleaned_data$datetime, cleaned_data$chla,pch="+",cex=0.5)